import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from PIL import Image
import re
import matplotlib.patches as patches
!pip install ultralytics -qq
from ultralytics import YOLO
import os
import yaml
from sklearn.model_selection import train_test_split
# Load the training annotations (one row per tooth bounding box).
train = pd.read_csv('/kaggle/input/Train (11).csv')

# Remap FDI tooth notation to contiguous class ids 0-31:
#   11-18 -> 0-7, 21-28 -> 8-15, 31-38 -> 16-23, 41-48 -> 24-31
for fdi_range, offset in ((range(11, 19), 11),
                          (range(21, 29), 13),
                          (range(31, 39), 15),
                          (range(41, 49), 17)):
    quadrant_mask = train.ToothClass.isin(fdi_range)
    train.loc[quadrant_mask, 'ToothClass'] = train.loc[quadrant_mask, 'ToothClass'] - offset
def conv(x):
    """Return the full training-image path for numeric image id *x*.

    The id is zero-padded to six digits, e.g. 23 -> .../ID_000023.png.
    """
    # str.zfill replaces the manual '0'-padding arithmetic.
    return f'/kaggle/input/train (1)/train/ID_{str(x).zfill(6)}.png'
def cmpt_id(x):
    """Return the zero-padded image identifier 'ID_xxxxxx' for id *x*."""
    # str.zfill replaces the manual '0'-padding arithmetic.
    return f'ID_{str(x).zfill(6)}'
# Attach the full on-disk image path to every annotation row.
train['file_path'] = train['Image_ID'].map(conv)

# Deterministically shuffle the unique image ids before splitting.
random_seed = 42
np.random.seed(random_seed)
unique_image_ids = train['Image_ID'].unique()
shuffle_order = np.random.permutation(len(unique_image_ids))
unique_image_ids = unique_image_ids[shuffle_order]
unique_image_ids
array([1178, 865, 101, ..., 1130, 860, 1126])
# Hold out 10% of the *images* (not annotation rows) for validation so that
# boxes from the same radiograph never appear in both splits.
num_validation = int(0.1 * len(unique_image_ids))
validation_ids = unique_image_ids[:num_validation]
training_ids = unique_image_ids[num_validation:]
# reset_index is NOT in-place — the original bare calls discarded the result.
# It also returns a new DataFrame detached from `train`, which prevents the
# SettingWithCopyWarning raised later when columns are reassigned.
val_df = train[train['Image_ID'].isin(validation_ids)].reset_index(drop=True)
train_df = train[train['Image_ID'].isin(training_ids)].reset_index(drop=True)
| Image_ID | ToothClass | Xmin | Ymin | Xmax | Ymax | file_path | |
|---|---|---|---|---|---|---|---|
| 0 | 23 | 25 | 189 | 362 | 231 | 402 | /kaggle/input/train (1)/train/ID_000023.png |
| 1 | 23 | 29 | 82 | 175 | 156 | 254 | /kaggle/input/train (1)/train/ID_000023.png |
| 2 | 23 | 21 | 358 | 169 | 432 | 249 | /kaggle/input/train (1)/train/ID_000023.png |
| 3 | 23 | 17 | 294 | 361 | 335 | 401 | /kaggle/input/train (1)/train/ID_000023.png |
| 4 | 23 | 27 | 127 | 297 | 179 | 347 | /kaggle/input/train (1)/train/ID_000023.png |
| ... | ... | ... | ... | ... | ... | ... | ... |
| 1583 | 1191 | 10 | 129 | 285 | 168 | 335 | /kaggle/input/train (1)/train/ID_001191.png |
| 1584 | 1191 | 3 | 358 | 237 | 411 | 293 | /kaggle/input/train (1)/train/ID_001191.png |
| 1585 | 1191 | 12 | 74 | 179 | 140 | 252 | /kaggle/input/train (1)/train/ID_001191.png |
| 1586 | 1191 | 2 | 345 | 282 | 384 | 331 | /kaggle/input/train (1)/train/ID_001191.png |
| 1587 | 1191 | 1 | 316 | 330 | 364 | 370 | /kaggle/input/train (1)/train/ID_001191.png |
1588 rows × 7 columns
len(train_df.Image_ID.unique())
1080
# Replace numeric ids with the zero-padded 'ID_xxxxxx' string used by the
# image/label file names. assign() returns a new DataFrame, which avoids the
# SettingWithCopyWarning raised when writing into a slice of `train`.
train_df = train_df.assign(Image_ID=train_df['Image_ID'].map(cmpt_id))
val_df = val_df.assign(Image_ID=val_df['Image_ID'].map(cmpt_id))
/tmp/ipykernel_34/2645421140.py:1: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy train_df['Image_ID']=train_df['Image_ID'].map(cmpt_id) /tmp/ipykernel_34/2645421140.py:2: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy val_df['Image_ID']=val_df['Image_ID'].map(cmpt_id)
import shutil
# NOTE(review): copy_from_folder is never referenced in the visible code
# (create_data_folder reads paths from df['file_path']) — confirm before removing.
copy_from_folder = "/kaggle/input/train (1)/train"
def create_data_folder(destination_folder, df):
    """Copy every unique image referenced in df['file_path'] into
    *destination_folder*, creating the folder (and parents) if needed.
    """
    # exist_ok=True replaces the exists()/makedirs() check-then-act pair.
    os.makedirs(destination_folder, exist_ok=True)
    for img_path in df['file_path'].unique():
        shutil.copy(img_path, destination_folder)
# Stage images into the folder layout Ultralytics expects:
#   /kaggle/working/{train,val}/images  (labels go in sibling /labels dirs)
train_destination_folder = "/kaggle/working/train/images"
val_destination_folder = "/kaggle/working/val/images"
create_data_folder(train_destination_folder, train_df)
create_data_folder(val_destination_folder, val_df)
from tqdm import tqdm
train
| Image_ID | ToothClass | Xmin | Ymin | Xmax | Ymax | file_path | |
|---|---|---|---|---|---|---|---|
| 0 | 0 | 18 | 284 | 315 | 325 | 361 | /kaggle/input/train (1)/train/ID_000000.png |
| 1 | 0 | 22 | 372 | 119 | 448 | 193 | /kaggle/input/train (1)/train/ID_000000.png |
| 2 | 0 | 30 | 40 | 114 | 113 | 186 | /kaggle/input/train (1)/train/ID_000000.png |
| 3 | 0 | 20 | 325 | 236 | 379 | 287 | /kaggle/input/train (1)/train/ID_000000.png |
| 4 | 0 | 24 | 200 | 362 | 237 | 411 | /kaggle/input/train (1)/train/ID_000000.png |
| ... | ... | ... | ... | ... | ... | ... | ... |
| 16008 | 1199 | 2 | 337 | 318 | 388 | 366 | /kaggle/input/train (1)/train/ID_001199.png |
| 16009 | 1199 | 1 | 309 | 339 | 349 | 389 | /kaggle/input/train (1)/train/ID_001199.png |
| 16010 | 1199 | 9 | 177 | 352 | 218 | 400 | /kaggle/input/train (1)/train/ID_001199.png |
| 16011 | 1199 | 11 | 111 | 288 | 174 | 343 | /kaggle/input/train (1)/train/ID_001199.png |
| 16012 | 1199 | 10 | 137 | 330 | 188 | 381 | /kaggle/input/train (1)/train/ID_001199.png |
16013 rows × 7 columns
def convert_bbox_to_string(bbox, img_size=512):
    """Convert a pixel-space (xmin, ymin, xmax, ymax) box to the YOLO label
    string 'x_center y_center width height', normalized to [0, 1].

    Parameters
    ----------
    bbox : sequence of 4 numbers (xmin, ymin, xmax, ymax) in pixels.
    img_size : side length of the (square) image; defaults to the 512x512
        images used in this dataset, preserving the original behavior.
    """
    xmin, ymin, xmax, ymax = bbox
    # Center = midpoint / img_size (the /(2*img_size) folds in the midpoint).
    x_center = (xmin + xmax) / (2 * img_size)
    y_center = (ymin + ymax) / (2 * img_size)
    width = (xmax - xmin) / img_size
    height = (ymax - ymin) / img_size
    # 4 significant digits keeps label files compact.
    return " ".join(f"{v:.4g}" for v in (x_center, y_center, width, height))
def create_label_folder(label_dir, dataframe):
    """Write one YOLO-format label file per image into *label_dir*.

    Each <Image_ID>.txt contains one line per annotation:
    '<class_id> <x_center> <y_center> <width> <height>' with coordinates
    normalized by convert_bbox_to_string.
    """
    # exist_ok=True replaces the exists()/makedirs() check-then-act pair.
    os.makedirs(label_dir, exist_ok=True)
    # Iterate the ids directly under tqdm; the original zip(tqdm(range(...)))
    # produced an index that was never used.
    for img in tqdm(dataframe['Image_ID'].unique()):
        annotations = dataframe[dataframe['Image_ID'] == img]
        with open(f"{label_dir}/{img}.txt", 'w') as f:
            for _, row in annotations.iterrows():
                category_id = int(row['ToothClass'])
                bbox_string = convert_bbox_to_string(row[['Xmin', 'Ymin', 'Xmax', 'Ymax']])
                f.write(f'{category_id} {bbox_string}\n')
# Generate the YOLO label files for both splits.
create_label_folder("/kaggle/working/train/labels", train_df)
create_label_folder("/kaggle/working/val/labels", val_df)
100%|██████████| 1080/1080 [00:10<00:00, 101.56it/s] 100%|██████████| 120/120 [00:00<00:00, 137.56it/s]
# Build the dataset config (data.yaml) that Ultralytics reads.
num_classes = 32
# Generic per-class names: tooth0 ... tooth31 (one per remapped FDI code).
names = [f'tooth{i}' for i in range(num_classes)]
train_dest_dir = "/kaggle/working/train"
val_dest_dir = "/kaggle/working/val"
dict_file = {
    'train': train_dest_dir,
    'val': val_dest_dir,
    'nc': num_classes,
    'names': names,
}
# 'w' truncates/creates the file; the original 'w+' read access was unused.
with open("/kaggle/working/data.yaml", 'w') as file:
    yaml.dump(dict_file, file)
### read the yaml file back
def read_yaml_file(file_path):
    """Load a YAML file, returning the parsed data or None on parse errors."""
    with open(file_path, 'r') as fh:
        try:
            return yaml.safe_load(fh)
        except yaml.YAMLError as err:
            print("Error reading YAML:", err)
            return None
### print it with newlines
def print_yaml_data(data):
    """Pretty-print parsed YAML data in block (one-item-per-line) style."""
    # The original passed default_style=False, which is not a valid scalar
    # style; the intended keyword for block layout is default_flow_style.
    formatted_yaml = yaml.dump(data, default_flow_style=False)
    print(formatted_yaml)
# Sanity-check: read the generated config back and echo it.
yaml_data = read_yaml_file("/kaggle/working/data.yaml")
if yaml_data:
    print_yaml_data(yaml_data)
names: - tooth0 - tooth1 - tooth2 - tooth3 - tooth4 - tooth5 - tooth6 - tooth7 - tooth8 - tooth9 - tooth10 - tooth11 - tooth12 - tooth13 - tooth14 - tooth15 - tooth16 - tooth17 - tooth18 - tooth19 - tooth20 - tooth21 - tooth22 - tooth23 - tooth24 - tooth25 - tooth26 - tooth27 - tooth28 - tooth29 - tooth30 - tooth31 nc: 32 train: /kaggle/working/train val: /kaggle/working/val
# Configure Weights & Biases experiment tracking.
import wandb
try:
    from kaggle_secrets import UserSecretsClient
    user_secrets = UserSecretsClient()
    # Read the API key from Kaggle secrets instead of hard-coding it: a key
    # committed in source is a credential leak and must be revoked.
    api_key = user_secrets.get_secret("wandb_api")
    wandb.login(key=api_key)
    anony = None
except Exception:
    # Fall back to anonymous logging when no secret is configured.
    anony = "must"
    print('If you want to use your W&B account, go to Add-ons -> Secrets and provide your W&B access token. Use the Label name as wandb_api. \nGet your W&B access token from here: https://wandb.ai/authorize')
wandb: W&B API key is configured. Use `wandb login --relogin` to force relogin wandb: WARNING If you're specifying your api key in code, ensure this code is not shared publicly. wandb: WARNING Consider setting the WANDB_API_KEY environment variable, or running `wandb login` from the command line. wandb: Appending key for api.wandb.ai to your netrc file: /root/.netrc
from ultralytics import YOLO
# Start from COCO-pretrained YOLOv8-nano weights (downloaded on first use).
model = YOLO('yolov8n.pt')
Downloading https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt to 'yolov8n.pt'...
100%|██████████| 6.23M/6.23M [00:00<00:00, 77.1MB/s]
# Training configuration passed to model.train() below.
EPOCHS = 30
BATCH_SIZE = 32
OPTIMIZER ='AdamW'
SEED = 42
NAME = f"Arm_Yolo_{EPOCHS}"  # run name -> results saved under runs/detect/<NAME>
DEVICE = 'cuda'
VERBOSE = False
RESUME = False
PATIENCE = 10  # early-stop after this many epochs without val improvement
%%time
### train
model.train(
data = "/kaggle/working/data.yaml",
task = 'detect',
imgsz = (512,512),
epochs = EPOCHS,
batch = BATCH_SIZE,
optimizer = OPTIMIZER,
patience = PATIENCE,
name = NAME,
seed = SEED,
val = True,
resume = RESUME,
device = DEVICE,
fliplr=0.0,
label_smoothing=0.05,
close_mosaic=12,
cos_lr=True,
degrees=15,
verbose = VERBOSE
)
Ultralytics YOLOv8.2.18 🚀 Python-3.10.13 torch-2.1.2 CUDA:0 (Tesla P100-PCIE-16GB, 16276MiB)
engine/trainer: task=detect, mode=train, model=yolov8n.pt, data=/kaggle/working/data.yaml, epochs=30, time=None, patience=10, batch=32, imgsz=(512, 512), save=True, save_period=-1, cache=False, device=cuda, workers=8, project=None, name=Arm_Yolo_30, exist_ok=False, pretrained=True, optimizer=AdamW, verbose=False, seed=42, deterministic=True, single_cls=False, rect=False, cos_lr=True, close_mosaic=12, resume=False, amp=True, fraction=1.0, profile=False, freeze=None, multi_scale=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, vid_stride=1, stream_buffer=False, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, embed=None, show=False, save_frames=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, show_boxes=True, line_width=None, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.05, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=15, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.0, bgr=0.0, mosaic=1.0, mixup=0.0, copy_paste=0.0, auto_augment=randaugment, erasing=0.4, crop_fraction=1.0, cfg=None, tracker=botsort.yaml, save_dir=runs/detect/Arm_Yolo_30
Downloading https://ultralytics.com/assets/Arial.ttf to '/root/.config/Ultralytics/Arial.ttf'...
100%|██████████| 755k/755k [00:00<00:00, 14.5MB/s] 2024-05-19 07:23:24,041 INFO util.py:124 -- Outdated packages: ipywidgets==7.7.1 found, needs ipywidgets>=8 Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output. 2024-05-19 07:23:24,849 INFO util.py:124 -- Outdated packages: ipywidgets==7.7.1 found, needs ipywidgets>=8 Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.
Overriding model.yaml nc=80 with nc=32
from n params module arguments
0 -1 1 464 ultralytics.nn.modules.conv.Conv [3, 16, 3, 2]
1 -1 1 4672 ultralytics.nn.modules.conv.Conv [16, 32, 3, 2]
2 -1 1 7360 ultralytics.nn.modules.block.C2f [32, 32, 1, True]
3 -1 1 18560 ultralytics.nn.modules.conv.Conv [32, 64, 3, 2]
4 -1 2 49664 ultralytics.nn.modules.block.C2f [64, 64, 2, True]
5 -1 1 73984 ultralytics.nn.modules.conv.Conv [64, 128, 3, 2]
6 -1 2 197632 ultralytics.nn.modules.block.C2f [128, 128, 2, True]
7 -1 1 295424 ultralytics.nn.modules.conv.Conv [128, 256, 3, 2]
8 -1 1 460288 ultralytics.nn.modules.block.C2f [256, 256, 1, True]
9 -1 1 164608 ultralytics.nn.modules.block.SPPF [256, 256, 5]
10 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest']
11 [-1, 6] 1 0 ultralytics.nn.modules.conv.Concat [1]
12 -1 1 148224 ultralytics.nn.modules.block.C2f [384, 128, 1]
13 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest']
14 [-1, 4] 1 0 ultralytics.nn.modules.conv.Concat [1]
15 -1 1 37248 ultralytics.nn.modules.block.C2f [192, 64, 1]
16 -1 1 36992 ultralytics.nn.modules.conv.Conv [64, 64, 3, 2]
17 [-1, 12] 1 0 ultralytics.nn.modules.conv.Concat [1]
18 -1 1 123648 ultralytics.nn.modules.block.C2f [192, 128, 1]
19 -1 1 147712 ultralytics.nn.modules.conv.Conv [128, 128, 3, 2]
20 [-1, 9] 1 0 ultralytics.nn.modules.conv.Concat [1]
21 -1 1 493056 ultralytics.nn.modules.block.C2f [384, 256, 1]
22 [15, 18, 21] 1 757552 ultralytics.nn.modules.head.Detect [32, [64, 128, 256]]
Model summary: 225 layers, 3017088 parameters, 3017072 gradients, 8.2 GFLOPs
Transferred 319/355 items from pretrained weights
TensorBoard: Start with 'tensorboard --logdir runs/detect/Arm_Yolo_30', view at http://localhost:6006/
wandb: Currently logged in as: lassouedaymenla (comp1). Use `wandb login --relogin` to force relogin
/kaggle/working/wandb/run-20240519_072337-8oif2e99
Freezing layer 'model.22.dfl.conv.weight' AMP: running Automatic Mixed Precision (AMP) checks with YOLOv8n... AMP: checks passed ✅ WARNING ⚠️ updating to 'imgsz=512'. 'train' and 'val' imgsz must be an integer, while 'predict' and 'export' imgsz may be a [h, w] list or an integer, i.e. 'yolo export imgsz=640,480' or 'yolo export imgsz=640'
train: Scanning /kaggle/working/train/labels... 1080 images, 0 backgrounds, 0 corrupt: 100%|██████████| 1080/1080 [00:01<00:00, 599.06it/s]
train: New cache created: /kaggle/working/train/labels.cache
albumentations: Blur(p=0.01, blur_limit=(3, 7)), MedianBlur(p=0.01, blur_limit=(3, 7)), ToGray(p=0.01), CLAHE(p=0.01, clip_limit=(1, 4.0), tile_grid_size=(8, 8))
val: Scanning /kaggle/working/val/labels... 120 images, 0 backgrounds, 0 corrupt: 100%|██████████| 120/120 [00:00<00:00, 397.03it/s]
val: New cache created: /kaggle/working/val/labels.cache Plotting labels to runs/detect/Arm_Yolo_30/labels.jpg... optimizer: AdamW(lr=0.01, momentum=0.937) with parameter groups 57 weight(decay=0.0), 64 weight(decay=0.0005), 63 bias(decay=0.0) TensorBoard: model graph visualization added ✅ Image sizes 512 train, 512 val Using 4 dataloader workers Logging results to runs/detect/Arm_Yolo_30 Starting training for 30 epochs... Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
1/30 2.99G 1.295 3.374 1.172 395 512: 100%|██████████| 34/34 [00:16<00:00, 2.02it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:05<00:00, 2.71s/it]
all 120 1588 0.128 0.00968 0.00131 0.000386
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
2/30 2.93G 1.054 1.641 1.035 464 512: 100%|██████████| 34/34 [00:09<00:00, 3.46it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.36it/s]
all 120 1588 0.33 0.173 0.159 0.0781
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
3/30 2.99G 1.03 1.268 1.018 457 512: 100%|██████████| 34/34 [00:09<00:00, 3.64it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.36it/s]
all 120 1588 0.587 0.0313 0.0378 0.024
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
4/30 2.99G 1.018 1.136 1.018 338 512: 100%|██████████| 34/34 [00:09<00:00, 3.70it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.27it/s]
all 120 1588 0.554 0.346 0.357 0.2
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
5/30 2.88G 1.007 0.9786 1.016 440 512: 100%|██████████| 34/34 [00:09<00:00, 3.65it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.27it/s]
all 120 1588 0.659 0.395 0.445 0.256
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
6/30 2.88G 0.9523 0.9246 0.9931 404 512: 100%|██████████| 34/34 [00:09<00:00, 3.68it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.14it/s]
all 120 1588 1.83e-05 0.00232 9.85e-06 4.56e-06
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
7/30 2.94G 0.972 0.9152 0.9935 533 512: 100%|██████████| 34/34 [00:09<00:00, 3.66it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:01<00:00, 1.42it/s]
all 120 1588 0.335 0.0077 0.00868 0.00664
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
8/30 2.95G 0.9324 0.8403 0.9881 458 512: 100%|██████████| 34/34 [00:09<00:00, 3.69it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.25it/s]
all 120 1588 0.743 0.567 0.695 0.368
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
9/30 2.95G 0.9366 0.7911 0.9826 426 512: 100%|██████████| 34/34 [00:09<00:00, 3.68it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.08it/s]
all 120 1588 0.755 0.647 0.721 0.427
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
10/30 2.93G 0.922 0.7687 0.9806 550 512: 100%|██████████| 34/34 [00:09<00:00, 3.53it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.29it/s]
all 120 1588 0.926 0.794 0.856 0.52
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
11/30 2.95G 0.8916 0.7319 0.98 427 512: 100%|██████████| 34/34 [00:09<00:00, 3.73it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.19it/s]
all 120 1588 0.353 0.263 0.205 0.119
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
12/30 2.94G 0.9388 0.7076 0.9861 435 512: 100%|██████████| 34/34 [00:09<00:00, 3.70it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.16it/s]
all 120 1588 0.62 0.432 0.498 0.281
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
13/30 2.94G 0.8934 0.6805 0.9718 450 512: 100%|██████████| 34/34 [00:09<00:00, 3.45it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.33it/s]
all 120 1588 0.801 0.796 0.855 0.441
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
14/30 2.93G 0.8486 0.6493 0.9645 399 512: 100%|██████████| 34/34 [00:09<00:00, 3.62it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.38it/s]
all 120 1588 0.893 0.726 0.827 0.527
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
15/30 2.94G 0.8209 0.6523 0.9546 380 512: 100%|██████████| 34/34 [00:09<00:00, 3.63it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.36it/s]
all 120 1588 0.899 0.906 0.947 0.481
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
16/30 2.94G 0.7576 0.5868 0.9311 482 512: 100%|██████████| 34/34 [00:09<00:00, 3.40it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.39it/s]
all 120 1588 0.844 0.733 0.85 0.507
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
17/30 2.93G 0.7417 0.5999 0.9267 483 512: 100%|██████████| 34/34 [00:09<00:00, 3.63it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.29it/s]
all 120 1588 0.533 0.67 0.642 0.534
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
18/30 2.84G 0.7262 0.592 0.9245 407 512: 100%|██████████| 34/34 [00:09<00:00, 3.66it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.41it/s]
all 120 1588 0.95 0.824 0.904 0.788
Closing dataloader mosaic
albumentations: Blur(p=0.01, blur_limit=(3, 7)), MedianBlur(p=0.01, blur_limit=(3, 7)), ToGray(p=0.01), CLAHE(p=0.01, clip_limit=(1, 4.0), tile_grid_size=(8, 8))
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
19/30 2.82G 0.6474 0.4844 0.9086 318 512: 100%|██████████| 34/34 [00:14<00:00, 2.37it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.41it/s]
all 120 1588 0.949 0.837 0.968 0.808
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
20/30 2.8G 0.6357 0.4643 0.904 316 512: 100%|██████████| 34/34 [00:08<00:00, 3.99it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.52it/s]
all 120 1588 0.904 0.87 0.947 0.839
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
21/30 2.77G 0.6114 0.43 0.8969 316 512: 100%|██████████| 34/34 [00:08<00:00, 3.84it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.64it/s]
all 120 1588 0.874 0.903 0.972 0.86
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
22/30 2.79G 0.5838 0.4105 0.8861 314 512: 100%|██████████| 34/34 [00:08<00:00, 4.00it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.70it/s]
all 120 1588 0.898 0.919 0.949 0.849
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
23/30 2.79G 0.5608 0.4025 0.8799 318 512: 100%|██████████| 34/34 [00:08<00:00, 4.02it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.50it/s]
all 120 1588 0.978 0.906 0.956 0.804
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
24/30 2.79G 0.5453 0.389 0.8765 312 512: 100%|██████████| 34/34 [00:08<00:00, 3.82it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.50it/s]
all 120 1588 0.968 0.912 0.959 0.847
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
25/30 2.79G 0.5307 0.3763 0.8695 309 512: 100%|██████████| 34/34 [00:08<00:00, 4.00it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.44it/s]
all 120 1588 0.94 0.933 0.981 0.89
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
26/30 2.79G 0.5218 0.3676 0.8644 316 512: 100%|██████████| 34/34 [00:08<00:00, 4.04it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.41it/s]
all 120 1588 0.945 0.949 0.976 0.864
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
27/30 2.79G 0.5192 0.3597 0.866 320 512: 100%|██████████| 34/34 [00:08<00:00, 3.83it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.57it/s]
all 120 1588 0.944 0.946 0.992 0.892
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
28/30 2.77G 0.5049 0.3507 0.8604 320 512: 100%|██████████| 34/34 [00:08<00:00, 3.98it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.63it/s]
all 120 1588 0.953 0.939 0.992 0.893
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
29/30 2.79G 0.5002 0.3494 0.8603 321 512: 100%|██████████| 34/34 [00:08<00:00, 4.02it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.52it/s]
all 120 1588 0.942 0.946 0.991 0.9
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
30/30 2.77G 0.4919 0.3424 0.8567 324 512: 100%|██████████| 34/34 [00:08<00:00, 3.82it/s]
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:00<00:00, 2.60it/s]
all 120 1588 0.958 0.946 0.992 0.908
30 epochs completed in 0.099 hours. Optimizer stripped from runs/detect/Arm_Yolo_30/weights/last.pt, 6.2MB Optimizer stripped from runs/detect/Arm_Yolo_30/weights/best.pt, 6.2MB Validating runs/detect/Arm_Yolo_30/weights/best.pt... Ultralytics YOLOv8.2.18 🚀 Python-3.10.13 torch-2.1.2 CUDA:0 (Tesla P100-PCIE-16GB, 16276MiB) Model summary (fused): 168 layers, 3011888 parameters, 0 gradients, 8.1 GFLOPs
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 2/2 [00:03<00:00, 1.97s/it]
all 120 1588 0.958 0.946 0.992 0.908
Speed: 0.1ms preprocess, 1.6ms inference, 0.0ms loss, 1.3ms postprocess per image
Results saved to runs/detect/Arm_Yolo_30
VBox(children=(Label(value='12.895 MB of 12.895 MB uploaded\r'), FloatProgress(value=1.0, max=1.0)))
| lr/pg0 | █▅▂▂▂▂▂▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ |
| lr/pg1 | ▃▆████▇▇▇▇▆▆▆▅▅▅▄▄▃▃▃▂▂▂▂▁▁▁▁▁ |
| lr/pg2 | ▃▆████▇▇▇▇▆▆▆▅▅▅▄▄▃▃▃▂▂▂▂▁▁▁▁▁ |
| metrics/mAP50(B) | ▁▂▁▄▄▁▁▆▆▇▂▅▇▇█▇▆▇████████████ |
| metrics/mAP50-95(B) | ▁▂▁▃▃▁▁▄▄▅▂▃▄▅▅▅▅▇▇▇██▇███████ |
| metrics/precision(B) | ▂▃▅▅▆▁▃▆▆█▄▅▇▇▇▇▅██▇▇▇████████ |
| metrics/recall(B) | ▁▂▁▄▄▁▁▅▆▇▃▄▇▆█▆▆▇▇▇██████████ |
| model/GFLOPs | ▁ |
| model/parameters | ▁ |
| model/speed_PyTorch(ms) | ▁ |
| train/box_loss | █▆▆▆▅▅▅▅▅▅▄▅▄▄▄▃▃▃▂▂▂▂▂▁▁▁▁▁▁▁ |
| train/cls_loss | █▄▃▃▂▂▂▂▂▂▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁ |
| train/dfl_loss | █▅▅▅▅▄▄▄▄▄▄▄▄▃▃▃▃▃▂▂▂▂▂▁▁▁▁▁▁▁ |
| val/box_loss | █▆▅▅▅█▅▅▅▄▆▅▅▄▆▄▂▁▂▁▁▁▂▁▁▁▁▁▁▁ |
| val/cls_loss | ▅▅▃▃▇█▂▂▁▄▃▂▁▁▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁ |
| val/dfl_loss | █▃▃▂▂▅▂▂▂▂▃▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁ |
| lr/pg0 | 0.00013 |
| lr/pg1 | 0.00013 |
| lr/pg2 | 0.00013 |
| metrics/mAP50(B) | 0.99219 |
| metrics/mAP50-95(B) | 0.90805 |
| metrics/precision(B) | 0.95769 |
| metrics/recall(B) | 0.94589 |
| model/GFLOPs | 8.227 |
| model/parameters | 3017088 |
| model/speed_PyTorch(ms) | 2.374 |
| train/box_loss | 0.4919 |
| train/cls_loss | 0.34235 |
| train/dfl_loss | 0.8567 |
| val/box_loss | 0.40051 |
| val/cls_loss | 0.30524 |
| val/dfl_loss | 0.80803 |
./wandb/run-20240519_072337-8oif2e99/logs
CPU times: user 6min 17s, sys: 28.3 s, total: 6min 45s Wall time: 7min 11s
ultralytics.utils.metrics.DetMetrics object with attributes:
ap_class_index: array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
box: ultralytics.utils.metrics.Metric object
confusion_matrix: <ultralytics.utils.metrics.ConfusionMatrix object at 0x784b05b49120>
curves: ['Precision-Recall(B)', 'F1-Confidence(B)', 'Precision-Confidence(B)', 'Recall-Confidence(B)']
curves_results: [[array([ 0, 0.001001, 0.002002, 0.003003, 0.004004, 0.005005, 0.006006, 0.007007, 0.008008, 0.009009, 0.01001, 0.011011, 0.012012, 0.013013, 0.014014, 0.015015, 0.016016, 0.017017, 0.018018, 0.019019, 0.02002, 0.021021, 0.022022, 0.023023,
0.024024, 0.025025, 0.026026, 0.027027, 0.028028, 0.029029, 0.03003, 0.031031, 0.032032, 0.033033, 0.034034, 0.035035, 0.036036, 0.037037, 0.038038, 0.039039, 0.04004, 0.041041, 0.042042, 0.043043, 0.044044, 0.045045, 0.046046, 0.047047,
0.048048, 0.049049, 0.05005, 0.051051, 0.052052, 0.053053, 0.054054, 0.055055, 0.056056, 0.057057, 0.058058, 0.059059, 0.06006, 0.061061, 0.062062, 0.063063, 0.064064, 0.065065, 0.066066, 0.067067, 0.068068, 0.069069, 0.07007, 0.071071,
0.072072, 0.073073, 0.074074, 0.075075, 0.076076, 0.077077, 0.078078, 0.079079, 0.08008, 0.081081, 0.082082, 0.083083, 0.084084, 0.085085, 0.086086, 0.087087, 0.088088, 0.089089, 0.09009, 0.091091, 0.092092, 0.093093, 0.094094, 0.095095,
0.096096, 0.097097, 0.098098, 0.099099, 0.1001, 0.1011, 0.1021, 0.1031, 0.1041, 0.10511, 0.10611, 0.10711, 0.10811, 0.10911, 0.11011, 0.11111, 0.11211, 0.11311, 0.11411, 0.11512, 0.11612, 0.11712, 0.11812, 0.11912,
0.12012, 0.12112, 0.12212, 0.12312, 0.12412, 0.12513, 0.12613, 0.12713, 0.12813, 0.12913, 0.13013, 0.13113, 0.13213, 0.13313, 0.13413, 0.13514, 0.13614, 0.13714, 0.13814, 0.13914, 0.14014, 0.14114, 0.14214, 0.14314,
0.14414, 0.14515, 0.14615, 0.14715, 0.14815, 0.14915, 0.15015, 0.15115, 0.15215, 0.15315, 0.15415, 0.15516, 0.15616, 0.15716, 0.15816, 0.15916, 0.16016, 0.16116, 0.16216, 0.16316, 0.16416, 0.16517, 0.16617, 0.16717,
0.16817, 0.16917, 0.17017, 0.17117, 0.17217, 0.17317, 0.17417, 0.17518, 0.17618, 0.17718, 0.17818, 0.17918, 0.18018, 0.18118, 0.18218, 0.18318, 0.18418, 0.18519, 0.18619, 0.18719, 0.18819, 0.18919, 0.19019, 0.19119,
0.19219, 0.19319, 0.19419, 0.1952, 0.1962, 0.1972, 0.1982, 0.1992, 0.2002, 0.2012, 0.2022, 0.2032, 0.2042, 0.20521, 0.20621, 0.20721, 0.20821, 0.20921, 0.21021, 0.21121, 0.21221, 0.21321, 0.21421, 0.21522,
0.21622, 0.21722, 0.21822, 0.21922, 0.22022, 0.22122, 0.22222, 0.22322, 0.22422, 0.22523, 0.22623, 0.22723, 0.22823, 0.22923, 0.23023, 0.23123, 0.23223, 0.23323, 0.23423, 0.23524, 0.23624, 0.23724, 0.23824, 0.23924,
0.24024, 0.24124, 0.24224, 0.24324, 0.24424, 0.24525, 0.24625, 0.24725, 0.24825, 0.24925, 0.25025, 0.25125, 0.25225, 0.25325, 0.25425, 0.25526, 0.25626, 0.25726, 0.25826, 0.25926, 0.26026, 0.26126, 0.26226, 0.26326,
0.26426, 0.26527, 0.26627, 0.26727, 0.26827, 0.26927, 0.27027, 0.27127, 0.27227, 0.27327, 0.27427, 0.27528, 0.27628, 0.27728, 0.27828, 0.27928, 0.28028, 0.28128, 0.28228, 0.28328, 0.28428, 0.28529, 0.28629, 0.28729,
0.28829, 0.28929, 0.29029, 0.29129, 0.29229, 0.29329, 0.29429, 0.2953, 0.2963, 0.2973, 0.2983, 0.2993, 0.3003, 0.3013, 0.3023, 0.3033, 0.3043, 0.30531, 0.30631, 0.30731, 0.30831, 0.30931, 0.31031, 0.31131,
0.31231, 0.31331, 0.31431, 0.31532, 0.31632, 0.31732, 0.31832, 0.31932, 0.32032, 0.32132, 0.32232, 0.32332, 0.32432, 0.32533, 0.32633, 0.32733, 0.32833, 0.32933, 0.33033, 0.33133, 0.33233, 0.33333, 0.33433, 0.33534,
0.33634, 0.33734, 0.33834, 0.33934, 0.34034, 0.34134, 0.34234, 0.34334, 0.34434, 0.34535, 0.34635, 0.34735, 0.34835, 0.34935, 0.35035, 0.35135, 0.35235, 0.35335, 0.35435, 0.35536, 0.35636, 0.35736, 0.35836, 0.35936,
0.36036, 0.36136, 0.36236, 0.36336, 0.36436, 0.36537, 0.36637, 0.36737, 0.36837, 0.36937, 0.37037, 0.37137, 0.37237, 0.37337, 0.37437, 0.37538, 0.37638, 0.37738, 0.37838, 0.37938, 0.38038, 0.38138, 0.38238, 0.38338,
0.38438, 0.38539, 0.38639, 0.38739, 0.38839, 0.38939, 0.39039, 0.39139, 0.39239, 0.39339, 0.39439, 0.3954, 0.3964, 0.3974, 0.3984, 0.3994, 0.4004, 0.4014, 0.4024, 0.4034, 0.4044, 0.40541, 0.40641, 0.40741,
0.40841, 0.40941, 0.41041, 0.41141, 0.41241, 0.41341, 0.41441, 0.41542, 0.41642, 0.41742, 0.41842, 0.41942, 0.42042, 0.42142, 0.42242, 0.42342, 0.42442, 0.42543, 0.42643, 0.42743, 0.42843, 0.42943, 0.43043, 0.43143,
0.43243, 0.43343, 0.43443, 0.43544, 0.43644, 0.43744, 0.43844, 0.43944, 0.44044, 0.44144, 0.44244, 0.44344, 0.44444, 0.44545, 0.44645, 0.44745, 0.44845, 0.44945, 0.45045, 0.45145, 0.45245, 0.45345, 0.45445, 0.45546,
0.45646, 0.45746, 0.45846, 0.45946, 0.46046, 0.46146, 0.46246, 0.46346, 0.46446, 0.46547, 0.46647, 0.46747, 0.46847, 0.46947, 0.47047, 0.47147, 0.47247, 0.47347, 0.47447, 0.47548, 0.47648, 0.47748, 0.47848, 0.47948,
0.48048, 0.48148, 0.48248, 0.48348, 0.48448, 0.48549, 0.48649, 0.48749, 0.48849, 0.48949, 0.49049, 0.49149, 0.49249, 0.49349, 0.49449, 0.4955, 0.4965, 0.4975, 0.4985, 0.4995, 0.5005, 0.5015, 0.5025, 0.5035,
0.5045, 0.50551, 0.50651, 0.50751, 0.50851, 0.50951, 0.51051, 0.51151, 0.51251, 0.51351, 0.51451, 0.51552, 0.51652, 0.51752, 0.51852, 0.51952, 0.52052, 0.52152, 0.52252, 0.52352, 0.52452, 0.52553, 0.52653, 0.52753,
0.52853, 0.52953, 0.53053, 0.53153, 0.53253, 0.53353, 0.53453, 0.53554, 0.53654, 0.53754, 0.53854, 0.53954, 0.54054, 0.54154, 0.54254, 0.54354, 0.54454, 0.54555, 0.54655, 0.54755, 0.54855, 0.54955, 0.55055, 0.55155,
0.55255, 0.55355, 0.55455, 0.55556, 0.55656, 0.55756, 0.55856, 0.55956, 0.56056, 0.56156, 0.56256, 0.56356, 0.56456, 0.56557, 0.56657, 0.56757, 0.56857, 0.56957, 0.57057, 0.57157, 0.57257, 0.57357, 0.57457, 0.57558,
0.57658, 0.57758, 0.57858, 0.57958, 0.58058, 0.58158, 0.58258, 0.58358, 0.58458, 0.58559, 0.58659, 0.58759, 0.58859, 0.58959, 0.59059, 0.59159, 0.59259, 0.59359, 0.59459, 0.5956, 0.5966, 0.5976, 0.5986, 0.5996,
0.6006, 0.6016, 0.6026, 0.6036, 0.6046, 0.60561, 0.60661, 0.60761, 0.60861, 0.60961, 0.61061, 0.61161, 0.61261, 0.61361, 0.61461, 0.61562, 0.61662, 0.61762, 0.61862, 0.61962, 0.62062, 0.62162, 0.62262, 0.62362,
0.62462, 0.62563, 0.62663, 0.62763, 0.62863, 0.62963, 0.63063, 0.63163, 0.63263, 0.63363, 0.63463, 0.63564, 0.63664, 0.63764, 0.63864, 0.63964, 0.64064, 0.64164, 0.64264, 0.64364, 0.64464, 0.64565, 0.64665, 0.64765,
0.64865, 0.64965, 0.65065, 0.65165, 0.65265, 0.65365, 0.65465, 0.65566, 0.65666, 0.65766, 0.65866, 0.65966, 0.66066, 0.66166, 0.66266, 0.66366, 0.66466, 0.66567, 0.66667, 0.66767, 0.66867, 0.66967, 0.67067, 0.67167,
0.67267, 0.67367, 0.67467, 0.67568, 0.67668, 0.67768, 0.67868, 0.67968, 0.68068, 0.68168, 0.68268, 0.68368, 0.68468, 0.68569, 0.68669, 0.68769, 0.68869, 0.68969, 0.69069, 0.69169, 0.69269, 0.69369, 0.69469, 0.6957,
0.6967, 0.6977, 0.6987, 0.6997, 0.7007, 0.7017, 0.7027, 0.7037, 0.7047, 0.70571, 0.70671, 0.70771, 0.70871, 0.70971, 0.71071, 0.71171, 0.71271, 0.71371, 0.71471, 0.71572, 0.71672, 0.71772, 0.71872, 0.71972,
0.72072, 0.72172, 0.72272, 0.72372, 0.72472, 0.72573, 0.72673, 0.72773, 0.72873, 0.72973, 0.73073, 0.73173, 0.73273, 0.73373, 0.73473, 0.73574, 0.73674, 0.73774, 0.73874, 0.73974, 0.74074, 0.74174, 0.74274, 0.74374,
0.74474, 0.74575, 0.74675, 0.74775, 0.74875, 0.74975, 0.75075, 0.75175, 0.75275, 0.75375, 0.75475, 0.75576, 0.75676, 0.75776, 0.75876, 0.75976, 0.76076, 0.76176, 0.76276, 0.76376, 0.76476, 0.76577, 0.76677, 0.76777,
0.76877, 0.76977, 0.77077, 0.77177, 0.77277, 0.77377, 0.77477, 0.77578, 0.77678, 0.77778, 0.77878, 0.77978, 0.78078, 0.78178, 0.78278, 0.78378, 0.78478, 0.78579, 0.78679, 0.78779, 0.78879, 0.78979, 0.79079, 0.79179,
0.79279, 0.79379, 0.79479, 0.7958, 0.7968, 0.7978, 0.7988, 0.7998, 0.8008, 0.8018, 0.8028, 0.8038, 0.8048, 0.80581, 0.80681, 0.80781, 0.80881, 0.80981, 0.81081, 0.81181, 0.81281, 0.81381, 0.81481, 0.81582,
0.81682, 0.81782, 0.81882, 0.81982, 0.82082, 0.82182, 0.82282, 0.82382, 0.82482, 0.82583, 0.82683, 0.82783, 0.82883, 0.82983, 0.83083, 0.83183, 0.83283, 0.83383, 0.83483, 0.83584, 0.83684, 0.83784, 0.83884, 0.83984,
0.84084, 0.84184, 0.84284, 0.84384, 0.84484, 0.84585, 0.84685, 0.84785, 0.84885, 0.84985, 0.85085, 0.85185, 0.85285, 0.85385, 0.85485, 0.85586, 0.85686, 0.85786, 0.85886, 0.85986, 0.86086, 0.86186, 0.86286, 0.86386,
0.86486, 0.86587, 0.86687, 0.86787, 0.86887, 0.86987, 0.87087, 0.87187, 0.87287, 0.87387, 0.87487, 0.87588, 0.87688, 0.87788, 0.87888, 0.87988, 0.88088, 0.88188, 0.88288, 0.88388, 0.88488, 0.88589, 0.88689, 0.88789,
0.88889, 0.88989, 0.89089, 0.89189, 0.89289, 0.89389, 0.89489, 0.8959, 0.8969, 0.8979, 0.8989, 0.8999, 0.9009, 0.9019, 0.9029, 0.9039, 0.9049, 0.90591, 0.90691, 0.90791, 0.90891, 0.90991, 0.91091, 0.91191,
0.91291, 0.91391, 0.91491, 0.91592, 0.91692, 0.91792, 0.91892, 0.91992, 0.92092, 0.92192, 0.92292, 0.92392, 0.92492, 0.92593, 0.92693, 0.92793, 0.92893, 0.92993, 0.93093, 0.93193, 0.93293, 0.93393, 0.93493, 0.93594,
0.93694, 0.93794, 0.93894, 0.93994, 0.94094, 0.94194, 0.94294, 0.94394, 0.94494, 0.94595, 0.94695, 0.94795, 0.94895, 0.94995, 0.95095, 0.95195, 0.95295, 0.95395, 0.95495, 0.95596, 0.95696, 0.95796, 0.95896, 0.95996,
0.96096, 0.96196, 0.96296, 0.96396, 0.96496, 0.96597, 0.96697, 0.96797, 0.96897, 0.96997, 0.97097, 0.97197, 0.97297, 0.97397, 0.97497, 0.97598, 0.97698, 0.97798, 0.97898, 0.97998, 0.98098, 0.98198, 0.98298, 0.98398,
0.98498, 0.98599, 0.98699, 0.98799, 0.98899, 0.98999, 0.99099, 0.99199, 0.99299, 0.99399, 0.99499, 0.996, 0.997, 0.998, 0.999, 1]), array([[ 1, 1, 1, ..., 1, 1, 0],
[ 1, 1, 1, ..., 1, 1, 0],
[ 1, 1, 1, ..., 1, 1, 0],
...,
[ 1, 1, 1, ..., 0.37705, 0.37705, 0],
[ 1, 1, 1, ..., 0.027509, 0.013755, 0],
[ 1, 1, 1, ..., 1, 1, 0]]), 'Recall', 'Precision'], [array([ 0, 0.001001, 0.002002, 0.003003, 0.004004, 0.005005, 0.006006, 0.007007, 0.008008, 0.009009, 0.01001, 0.011011, 0.012012, 0.013013, 0.014014, 0.015015, 0.016016, 0.017017, 0.018018, 0.019019, 0.02002, 0.021021, 0.022022, 0.023023,
0.024024, 0.025025, 0.026026, 0.027027, 0.028028, 0.029029, 0.03003, 0.031031, 0.032032, 0.033033, 0.034034, 0.035035, 0.036036, 0.037037, 0.038038, 0.039039, 0.04004, 0.041041, 0.042042, 0.043043, 0.044044, 0.045045, 0.046046, 0.047047,
0.048048, 0.049049, 0.05005, 0.051051, 0.052052, 0.053053, 0.054054, 0.055055, 0.056056, 0.057057, 0.058058, 0.059059, 0.06006, 0.061061, 0.062062, 0.063063, 0.064064, 0.065065, 0.066066, 0.067067, 0.068068, 0.069069, 0.07007, 0.071071,
0.072072, 0.073073, 0.074074, 0.075075, 0.076076, 0.077077, 0.078078, 0.079079, 0.08008, 0.081081, 0.082082, 0.083083, 0.084084, 0.085085, 0.086086, 0.087087, 0.088088, 0.089089, 0.09009, 0.091091, 0.092092, 0.093093, 0.094094, 0.095095,
0.096096, 0.097097, 0.098098, 0.099099, 0.1001, 0.1011, 0.1021, 0.1031, 0.1041, 0.10511, 0.10611, 0.10711, 0.10811, 0.10911, 0.11011, 0.11111, 0.11211, 0.11311, 0.11411, 0.11512, 0.11612, 0.11712, 0.11812, 0.11912,
0.12012, 0.12112, 0.12212, 0.12312, 0.12412, 0.12513, 0.12613, 0.12713, 0.12813, 0.12913, 0.13013, 0.13113, 0.13213, 0.13313, 0.13413, 0.13514, 0.13614, 0.13714, 0.13814, 0.13914, 0.14014, 0.14114, 0.14214, 0.14314,
0.14414, 0.14515, 0.14615, 0.14715, 0.14815, 0.14915, 0.15015, 0.15115, 0.15215, 0.15315, 0.15415, 0.15516, 0.15616, 0.15716, 0.15816, 0.15916, 0.16016, 0.16116, 0.16216, 0.16316, 0.16416, 0.16517, 0.16617, 0.16717,
0.16817, 0.16917, 0.17017, 0.17117, 0.17217, 0.17317, 0.17417, 0.17518, 0.17618, 0.17718, 0.17818, 0.17918, 0.18018, 0.18118, 0.18218, 0.18318, 0.18418, 0.18519, 0.18619, 0.18719, 0.18819, 0.18919, 0.19019, 0.19119,
0.19219, 0.19319, 0.19419, 0.1952, 0.1962, 0.1972, 0.1982, 0.1992, 0.2002, 0.2012, 0.2022, 0.2032, 0.2042, 0.20521, 0.20621, 0.20721, 0.20821, 0.20921, 0.21021, 0.21121, 0.21221, 0.21321, 0.21421, 0.21522,
0.21622, 0.21722, 0.21822, 0.21922, 0.22022, 0.22122, 0.22222, 0.22322, 0.22422, 0.22523, 0.22623, 0.22723, 0.22823, 0.22923, 0.23023, 0.23123, 0.23223, 0.23323, 0.23423, 0.23524, 0.23624, 0.23724, 0.23824, 0.23924,
0.24024, 0.24124, 0.24224, 0.24324, 0.24424, 0.24525, 0.24625, 0.24725, 0.24825, 0.24925, 0.25025, 0.25125, 0.25225, 0.25325, 0.25425, 0.25526, 0.25626, 0.25726, 0.25826, 0.25926, 0.26026, 0.26126, 0.26226, 0.26326,
0.26426, 0.26527, 0.26627, 0.26727, 0.26827, 0.26927, 0.27027, 0.27127, 0.27227, 0.27327, 0.27427, 0.27528, 0.27628, 0.27728, 0.27828, 0.27928, 0.28028, 0.28128, 0.28228, 0.28328, 0.28428, 0.28529, 0.28629, 0.28729,
0.28829, 0.28929, 0.29029, 0.29129, 0.29229, 0.29329, 0.29429, 0.2953, 0.2963, 0.2973, 0.2983, 0.2993, 0.3003, 0.3013, 0.3023, 0.3033, 0.3043, 0.30531, 0.30631, 0.30731, 0.30831, 0.30931, 0.31031, 0.31131,
0.31231, 0.31331, 0.31431, 0.31532, 0.31632, 0.31732, 0.31832, 0.31932, 0.32032, 0.32132, 0.32232, 0.32332, 0.32432, 0.32533, 0.32633, 0.32733, 0.32833, 0.32933, 0.33033, 0.33133, 0.33233, 0.33333, 0.33433, 0.33534,
0.33634, 0.33734, 0.33834, 0.33934, 0.34034, 0.34134, 0.34234, 0.34334, 0.34434, 0.34535, 0.34635, 0.34735, 0.34835, 0.34935, 0.35035, 0.35135, 0.35235, 0.35335, 0.35435, 0.35536, 0.35636, 0.35736, 0.35836, 0.35936,
0.36036, 0.36136, 0.36236, 0.36336, 0.36436, 0.36537, 0.36637, 0.36737, 0.36837, 0.36937, 0.37037, 0.37137, 0.37237, 0.37337, 0.37437, 0.37538, 0.37638, 0.37738, 0.37838, 0.37938, 0.38038, 0.38138, 0.38238, 0.38338,
0.38438, 0.38539, 0.38639, 0.38739, 0.38839, 0.38939, 0.39039, 0.39139, 0.39239, 0.39339, 0.39439, 0.3954, 0.3964, 0.3974, 0.3984, 0.3994, 0.4004, 0.4014, 0.4024, 0.4034, 0.4044, 0.40541, 0.40641, 0.40741,
0.40841, 0.40941, 0.41041, 0.41141, 0.41241, 0.41341, 0.41441, 0.41542, 0.41642, 0.41742, 0.41842, 0.41942, 0.42042, 0.42142, 0.42242, 0.42342, 0.42442, 0.42543, 0.42643, 0.42743, 0.42843, 0.42943, 0.43043, 0.43143,
0.43243, 0.43343, 0.43443, 0.43544, 0.43644, 0.43744, 0.43844, 0.43944, 0.44044, 0.44144, 0.44244, 0.44344, 0.44444, 0.44545, 0.44645, 0.44745, 0.44845, 0.44945, 0.45045, 0.45145, 0.45245, 0.45345, 0.45445, 0.45546,
0.45646, 0.45746, 0.45846, 0.45946, 0.46046, 0.46146, 0.46246, 0.46346, 0.46446, 0.46547, 0.46647, 0.46747, 0.46847, 0.46947, 0.47047, 0.47147, 0.47247, 0.47347, 0.47447, 0.47548, 0.47648, 0.47748, 0.47848, 0.47948,
0.48048, 0.48148, 0.48248, 0.48348, 0.48448, 0.48549, 0.48649, 0.48749, 0.48849, 0.48949, 0.49049, 0.49149, 0.49249, 0.49349, 0.49449, 0.4955, 0.4965, 0.4975, 0.4985, 0.4995, 0.5005, 0.5015, 0.5025, 0.5035,
0.5045, 0.50551, 0.50651, 0.50751, 0.50851, 0.50951, 0.51051, 0.51151, 0.51251, 0.51351, 0.51451, 0.51552, 0.51652, 0.51752, 0.51852, 0.51952, 0.52052, 0.52152, 0.52252, 0.52352, 0.52452, 0.52553, 0.52653, 0.52753,
0.52853, 0.52953, 0.53053, 0.53153, 0.53253, 0.53353, 0.53453, 0.53554, 0.53654, 0.53754, 0.53854, 0.53954, 0.54054, 0.54154, 0.54254, 0.54354, 0.54454, 0.54555, 0.54655, 0.54755, 0.54855, 0.54955, 0.55055, 0.55155,
0.55255, 0.55355, 0.55455, 0.55556, 0.55656, 0.55756, 0.55856, 0.55956, 0.56056, 0.56156, 0.56256, 0.56356, 0.56456, 0.56557, 0.56657, 0.56757, 0.56857, 0.56957, 0.57057, 0.57157, 0.57257, 0.57357, 0.57457, 0.57558,
0.57658, 0.57758, 0.57858, 0.57958, 0.58058, 0.58158, 0.58258, 0.58358, 0.58458, 0.58559, 0.58659, 0.58759, 0.58859, 0.58959, 0.59059, 0.59159, 0.59259, 0.59359, 0.59459, 0.5956, 0.5966, 0.5976, 0.5986, 0.5996,
0.6006, 0.6016, 0.6026, 0.6036, 0.6046, 0.60561, 0.60661, 0.60761, 0.60861, 0.60961, 0.61061, 0.61161, 0.61261, 0.61361, 0.61461, 0.61562, 0.61662, 0.61762, 0.61862, 0.61962, 0.62062, 0.62162, 0.62262, 0.62362,
0.62462, 0.62563, 0.62663, 0.62763, 0.62863, 0.62963, 0.63063, 0.63163, 0.63263, 0.63363, 0.63463, 0.63564, 0.63664, 0.63764, 0.63864, 0.63964, 0.64064, 0.64164, 0.64264, 0.64364, 0.64464, 0.64565, 0.64665, 0.64765,
0.64865, 0.64965, 0.65065, 0.65165, 0.65265, 0.65365, 0.65465, 0.65566, 0.65666, 0.65766, 0.65866, 0.65966, 0.66066, 0.66166, 0.66266, 0.66366, 0.66466, 0.66567, 0.66667, 0.66767, 0.66867, 0.66967, 0.67067, 0.67167,
0.67267, 0.67367, 0.67467, 0.67568, 0.67668, 0.67768, 0.67868, 0.67968, 0.68068, 0.68168, 0.68268, 0.68368, 0.68468, 0.68569, 0.68669, 0.68769, 0.68869, 0.68969, 0.69069, 0.69169, 0.69269, 0.69369, 0.69469, 0.6957,
0.6967, 0.6977, 0.6987, 0.6997, 0.7007, 0.7017, 0.7027, 0.7037, 0.7047, 0.70571, 0.70671, 0.70771, 0.70871, 0.70971, 0.71071, 0.71171, 0.71271, 0.71371, 0.71471, 0.71572, 0.71672, 0.71772, 0.71872, 0.71972,
0.72072, 0.72172, 0.72272, 0.72372, 0.72472, 0.72573, 0.72673, 0.72773, 0.72873, 0.72973, 0.73073, 0.73173, 0.73273, 0.73373, 0.73473, 0.73574, 0.73674, 0.73774, 0.73874, 0.73974, 0.74074, 0.74174, 0.74274, 0.74374,
0.74474, 0.74575, 0.74675, 0.74775, 0.74875, 0.74975, 0.75075, 0.75175, 0.75275, 0.75375, 0.75475, 0.75576, 0.75676, 0.75776, 0.75876, 0.75976, 0.76076, 0.76176, 0.76276, 0.76376, 0.76476, 0.76577, 0.76677, 0.76777,
0.76877, 0.76977, 0.77077, 0.77177, 0.77277, 0.77377, 0.77477, 0.77578, 0.77678, 0.77778, 0.77878, 0.77978, 0.78078, 0.78178, 0.78278, 0.78378, 0.78478, 0.78579, 0.78679, 0.78779, 0.78879, 0.78979, 0.79079, 0.79179,
0.79279, 0.79379, 0.79479, 0.7958, 0.7968, 0.7978, 0.7988, 0.7998, 0.8008, 0.8018, 0.8028, 0.8038, 0.8048, 0.80581, 0.80681, 0.80781, 0.80881, 0.80981, 0.81081, 0.81181, 0.81281, 0.81381, 0.81481, 0.81582,
0.81682, 0.81782, 0.81882, 0.81982, 0.82082, 0.82182, 0.82282, 0.82382, 0.82482, 0.82583, 0.82683, 0.82783, 0.82883, 0.82983, 0.83083, 0.83183, 0.83283, 0.83383, 0.83483, 0.83584, 0.83684, 0.83784, 0.83884, 0.83984,
0.84084, 0.84184, 0.84284, 0.84384, 0.84484, 0.84585, 0.84685, 0.84785, 0.84885, 0.84985, 0.85085, 0.85185, 0.85285, 0.85385, 0.85485, 0.85586, 0.85686, 0.85786, 0.85886, 0.85986, 0.86086, 0.86186, 0.86286, 0.86386,
0.86486, 0.86587, 0.86687, 0.86787, 0.86887, 0.86987, 0.87087, 0.87187, 0.87287, 0.87387, 0.87487, 0.87588, 0.87688, 0.87788, 0.87888, 0.87988, 0.88088, 0.88188, 0.88288, 0.88388, 0.88488, 0.88589, 0.88689, 0.88789,
0.88889, 0.88989, 0.89089, 0.89189, 0.89289, 0.89389, 0.89489, 0.8959, 0.8969, 0.8979, 0.8989, 0.8999, 0.9009, 0.9019, 0.9029, 0.9039, 0.9049, 0.90591, 0.90691, 0.90791, 0.90891, 0.90991, 0.91091, 0.91191,
0.91291, 0.91391, 0.91491, 0.91592, 0.91692, 0.91792, 0.91892, 0.91992, 0.92092, 0.92192, 0.92292, 0.92392, 0.92492, 0.92593, 0.92693, 0.92793, 0.92893, 0.92993, 0.93093, 0.93193, 0.93293, 0.93393, 0.93493, 0.93594,
0.93694, 0.93794, 0.93894, 0.93994, 0.94094, 0.94194, 0.94294, 0.94394, 0.94494, 0.94595, 0.94695, 0.94795, 0.94895, 0.94995, 0.95095, 0.95195, 0.95295, 0.95395, 0.95495, 0.95596, 0.95696, 0.95796, 0.95896, 0.95996,
0.96096, 0.96196, 0.96296, 0.96396, 0.96496, 0.96597, 0.96697, 0.96797, 0.96897, 0.96997, 0.97097, 0.97197, 0.97297, 0.97397, 0.97497, 0.97598, 0.97698, 0.97798, 0.97898, 0.97998, 0.98098, 0.98198, 0.98298, 0.98398,
0.98498, 0.98599, 0.98699, 0.98799, 0.98899, 0.98999, 0.99099, 0.99199, 0.99299, 0.99399, 0.99499, 0.996, 0.997, 0.998, 0.999, 1]), array([[ 0.38462, 0.38462, 0.47616, ..., 0, 0, 0],
[ 0.45794, 0.45794, 0.51248, ..., 0, 0, 0],
[ 0.40553, 0.40553, 0.50758, ..., 0, 0, 0],
...,
[ 0.45695, 0.45695, 0.50173, ..., 0, 0, 0],
[ 0.41633, 0.41633, 0.46565, ..., 0, 0, 0],
[ 0.026667, 0.026667, 0.036752, ..., 0, 0, 0]]), 'Confidence', 'F1'], [array([ 0, 0.001001, 0.002002, 0.003003, 0.004004, 0.005005, 0.006006, 0.007007, 0.008008, 0.009009, 0.01001, 0.011011, 0.012012, 0.013013, 0.014014, 0.015015, 0.016016, 0.017017, 0.018018, 0.019019, 0.02002, 0.021021, 0.022022, 0.023023,
0.024024, 0.025025, 0.026026, 0.027027, 0.028028, 0.029029, 0.03003, 0.031031, 0.032032, 0.033033, 0.034034, 0.035035, 0.036036, 0.037037, 0.038038, 0.039039, 0.04004, 0.041041, 0.042042, 0.043043, 0.044044, 0.045045, 0.046046, 0.047047,
0.048048, 0.049049, 0.05005, 0.051051, 0.052052, 0.053053, 0.054054, 0.055055, 0.056056, 0.057057, 0.058058, 0.059059, 0.06006, 0.061061, 0.062062, 0.063063, 0.064064, 0.065065, 0.066066, 0.067067, 0.068068, 0.069069, 0.07007, 0.071071,
0.072072, 0.073073, 0.074074, 0.075075, 0.076076, 0.077077, 0.078078, 0.079079, 0.08008, 0.081081, 0.082082, 0.083083, 0.084084, 0.085085, 0.086086, 0.087087, 0.088088, 0.089089, 0.09009, 0.091091, 0.092092, 0.093093, 0.094094, 0.095095,
0.096096, 0.097097, 0.098098, 0.099099, 0.1001, 0.1011, 0.1021, 0.1031, 0.1041, 0.10511, 0.10611, 0.10711, 0.10811, 0.10911, 0.11011, 0.11111, 0.11211, 0.11311, 0.11411, 0.11512, 0.11612, 0.11712, 0.11812, 0.11912,
0.12012, 0.12112, 0.12212, 0.12312, 0.12412, 0.12513, 0.12613, 0.12713, 0.12813, 0.12913, 0.13013, 0.13113, 0.13213, 0.13313, 0.13413, 0.13514, 0.13614, 0.13714, 0.13814, 0.13914, 0.14014, 0.14114, 0.14214, 0.14314,
0.14414, 0.14515, 0.14615, 0.14715, 0.14815, 0.14915, 0.15015, 0.15115, 0.15215, 0.15315, 0.15415, 0.15516, 0.15616, 0.15716, 0.15816, 0.15916, 0.16016, 0.16116, 0.16216, 0.16316, 0.16416, 0.16517, 0.16617, 0.16717,
0.16817, 0.16917, 0.17017, 0.17117, 0.17217, 0.17317, 0.17417, 0.17518, 0.17618, 0.17718, 0.17818, 0.17918, 0.18018, 0.18118, 0.18218, 0.18318, 0.18418, 0.18519, 0.18619, 0.18719, 0.18819, 0.18919, 0.19019, 0.19119,
0.19219, 0.19319, 0.19419, 0.1952, 0.1962, 0.1972, 0.1982, 0.1992, 0.2002, 0.2012, 0.2022, 0.2032, 0.2042, 0.20521, 0.20621, 0.20721, 0.20821, 0.20921, 0.21021, 0.21121, 0.21221, 0.21321, 0.21421, 0.21522,
0.21622, 0.21722, 0.21822, 0.21922, 0.22022, 0.22122, 0.22222, 0.22322, 0.22422, 0.22523, 0.22623, 0.22723, 0.22823, 0.22923, 0.23023, 0.23123, 0.23223, 0.23323, 0.23423, 0.23524, 0.23624, 0.23724, 0.23824, 0.23924,
0.24024, 0.24124, 0.24224, 0.24324, 0.24424, 0.24525, 0.24625, 0.24725, 0.24825, 0.24925, 0.25025, 0.25125, 0.25225, 0.25325, 0.25425, 0.25526, 0.25626, 0.25726, 0.25826, 0.25926, 0.26026, 0.26126, 0.26226, 0.26326,
0.26426, 0.26527, 0.26627, 0.26727, 0.26827, 0.26927, 0.27027, 0.27127, 0.27227, 0.27327, 0.27427, 0.27528, 0.27628, 0.27728, 0.27828, 0.27928, 0.28028, 0.28128, 0.28228, 0.28328, 0.28428, 0.28529, 0.28629, 0.28729,
0.28829, 0.28929, 0.29029, 0.29129, 0.29229, 0.29329, 0.29429, 0.2953, 0.2963, 0.2973, 0.2983, 0.2993, 0.3003, 0.3013, 0.3023, 0.3033, 0.3043, 0.30531, 0.30631, 0.30731, 0.30831, 0.30931, 0.31031, 0.31131,
0.31231, 0.31331, 0.31431, 0.31532, 0.31632, 0.31732, 0.31832, 0.31932, 0.32032, 0.32132, 0.32232, 0.32332, 0.32432, 0.32533, 0.32633, 0.32733, 0.32833, 0.32933, 0.33033, 0.33133, 0.33233, 0.33333, 0.33433, 0.33534,
0.33634, 0.33734, 0.33834, 0.33934, 0.34034, 0.34134, 0.34234, 0.34334, 0.34434, 0.34535, 0.34635, 0.34735, 0.34835, 0.34935, 0.35035, 0.35135, 0.35235, 0.35335, 0.35435, 0.35536, 0.35636, 0.35736, 0.35836, 0.35936,
0.36036, 0.36136, 0.36236, 0.36336, 0.36436, 0.36537, 0.36637, 0.36737, 0.36837, 0.36937, 0.37037, 0.37137, 0.37237, 0.37337, 0.37437, 0.37538, 0.37638, 0.37738, 0.37838, 0.37938, 0.38038, 0.38138, 0.38238, 0.38338,
0.38438, 0.38539, 0.38639, 0.38739, 0.38839, 0.38939, 0.39039, 0.39139, 0.39239, 0.39339, 0.39439, 0.3954, 0.3964, 0.3974, 0.3984, 0.3994, 0.4004, 0.4014, 0.4024, 0.4034, 0.4044, 0.40541, 0.40641, 0.40741,
0.40841, 0.40941, 0.41041, 0.41141, 0.41241, 0.41341, 0.41441, 0.41542, 0.41642, 0.41742, 0.41842, 0.41942, 0.42042, 0.42142, 0.42242, 0.42342, 0.42442, 0.42543, 0.42643, 0.42743, 0.42843, 0.42943, 0.43043, 0.43143,
0.43243, 0.43343, 0.43443, 0.43544, 0.43644, 0.43744, 0.43844, 0.43944, 0.44044, 0.44144, 0.44244, 0.44344, 0.44444, 0.44545, 0.44645, 0.44745, 0.44845, 0.44945, 0.45045, 0.45145, 0.45245, 0.45345, 0.45445, 0.45546,
0.45646, 0.45746, 0.45846, 0.45946, 0.46046, 0.46146, 0.46246, 0.46346, 0.46446, 0.46547, 0.46647, 0.46747, 0.46847, 0.46947, 0.47047, 0.47147, 0.47247, 0.47347, 0.47447, 0.47548, 0.47648, 0.47748, 0.47848, 0.47948,
0.48048, 0.48148, 0.48248, 0.48348, 0.48448, 0.48549, 0.48649, 0.48749, 0.48849, 0.48949, 0.49049, 0.49149, 0.49249, 0.49349, 0.49449, 0.4955, 0.4965, 0.4975, 0.4985, 0.4995, 0.5005, 0.5015, 0.5025, 0.5035,
0.5045, 0.50551, 0.50651, 0.50751, 0.50851, 0.50951, 0.51051, 0.51151, 0.51251, 0.51351, 0.51451, 0.51552, 0.51652, 0.51752, 0.51852, 0.51952, 0.52052, 0.52152, 0.52252, 0.52352, 0.52452, 0.52553, 0.52653, 0.52753,
0.52853, 0.52953, 0.53053, 0.53153, 0.53253, 0.53353, 0.53453, 0.53554, 0.53654, 0.53754, 0.53854, 0.53954, 0.54054, 0.54154, 0.54254, 0.54354, 0.54454, 0.54555, 0.54655, 0.54755, 0.54855, 0.54955, 0.55055, 0.55155,
0.55255, 0.55355, 0.55455, 0.55556, 0.55656, 0.55756, 0.55856, 0.55956, 0.56056, 0.56156, 0.56256, 0.56356, 0.56456, 0.56557, 0.56657, 0.56757, 0.56857, 0.56957, 0.57057, 0.57157, 0.57257, 0.57357, 0.57457, 0.57558,
0.57658, 0.57758, 0.57858, 0.57958, 0.58058, 0.58158, 0.58258, 0.58358, 0.58458, 0.58559, 0.58659, 0.58759, 0.58859, 0.58959, 0.59059, 0.59159, 0.59259, 0.59359, 0.59459, 0.5956, 0.5966, 0.5976, 0.5986, 0.5996,
0.6006, 0.6016, 0.6026, 0.6036, 0.6046, 0.60561, 0.60661, 0.60761, 0.60861, 0.60961, 0.61061, 0.61161, 0.61261, 0.61361, 0.61461, 0.61562, 0.61662, 0.61762, 0.61862, 0.61962, 0.62062, 0.62162, 0.62262, 0.62362,
0.62462, 0.62563, 0.62663, 0.62763, 0.62863, 0.62963, 0.63063, 0.63163, 0.63263, 0.63363, 0.63463, 0.63564, 0.63664, 0.63764, 0.63864, 0.63964, 0.64064, 0.64164, 0.64264, 0.64364, 0.64464, 0.64565, 0.64665, 0.64765,
0.64865, 0.64965, 0.65065, 0.65165, 0.65265, 0.65365, 0.65465, 0.65566, 0.65666, 0.65766, 0.65866, 0.65966, 0.66066, 0.66166, 0.66266, 0.66366, 0.66466, 0.66567, 0.66667, 0.66767, 0.66867, 0.66967, 0.67067, 0.67167,
0.67267, 0.67367, 0.67467, 0.67568, 0.67668, 0.67768, 0.67868, 0.67968, 0.68068, 0.68168, 0.68268, 0.68368, 0.68468, 0.68569, 0.68669, 0.68769, 0.68869, 0.68969, 0.69069, 0.69169, 0.69269, 0.69369, 0.69469, 0.6957,
0.6967, 0.6977, 0.6987, 0.6997, 0.7007, 0.7017, 0.7027, 0.7037, 0.7047, 0.70571, 0.70671, 0.70771, 0.70871, 0.70971, 0.71071, 0.71171, 0.71271, 0.71371, 0.71471, 0.71572, 0.71672, 0.71772, 0.71872, 0.71972,
0.72072, 0.72172, 0.72272, 0.72372, 0.72472, 0.72573, 0.72673, 0.72773, 0.72873, 0.72973, 0.73073, 0.73173, 0.73273, 0.73373, 0.73473, 0.73574, 0.73674, 0.73774, 0.73874, 0.73974, 0.74074, 0.74174, 0.74274, 0.74374,
0.74474, 0.74575, 0.74675, 0.74775, 0.74875, 0.74975, 0.75075, 0.75175, 0.75275, 0.75375, 0.75475, 0.75576, 0.75676, 0.75776, 0.75876, 0.75976, 0.76076, 0.76176, 0.76276, 0.76376, 0.76476, 0.76577, 0.76677, 0.76777,
0.76877, 0.76977, 0.77077, 0.77177, 0.77277, 0.77377, 0.77477, 0.77578, 0.77678, 0.77778, 0.77878, 0.77978, 0.78078, 0.78178, 0.78278, 0.78378, 0.78478, 0.78579, 0.78679, 0.78779, 0.78879, 0.78979, 0.79079, 0.79179,
0.79279, 0.79379, 0.79479, 0.7958, 0.7968, 0.7978, 0.7988, 0.7998, 0.8008, 0.8018, 0.8028, 0.8038, 0.8048, 0.80581, 0.80681, 0.80781, 0.80881, 0.80981, 0.81081, 0.81181, 0.81281, 0.81381, 0.81481, 0.81582,
0.81682, 0.81782, 0.81882, 0.81982, 0.82082, 0.82182, 0.82282, 0.82382, 0.82482, 0.82583, 0.82683, 0.82783, 0.82883, 0.82983, 0.83083, 0.83183, 0.83283, 0.83383, 0.83483, 0.83584, 0.83684, 0.83784, 0.83884, 0.83984,
0.84084, 0.84184, 0.84284, 0.84384, 0.84484, 0.84585, 0.84685, 0.84785, 0.84885, 0.84985, 0.85085, 0.85185, 0.85285, 0.85385, 0.85485, 0.85586, 0.85686, 0.85786, 0.85886, 0.85986, 0.86086, 0.86186, 0.86286, 0.86386,
0.86486, 0.86587, 0.86687, 0.86787, 0.86887, 0.86987, 0.87087, 0.87187, 0.87287, 0.87387, 0.87487, 0.87588, 0.87688, 0.87788, 0.87888, 0.87988, 0.88088, 0.88188, 0.88288, 0.88388, 0.88488, 0.88589, 0.88689, 0.88789,
0.88889, 0.88989, 0.89089, 0.89189, 0.89289, 0.89389, 0.89489, 0.8959, 0.8969, 0.8979, 0.8989, 0.8999, 0.9009, 0.9019, 0.9029, 0.9039, 0.9049, 0.90591, 0.90691, 0.90791, 0.90891, 0.90991, 0.91091, 0.91191,
0.91291, 0.91391, 0.91491, 0.91592, 0.91692, 0.91792, 0.91892, 0.91992, 0.92092, 0.92192, 0.92292, 0.92392, 0.92492, 0.92593, 0.92693, 0.92793, 0.92893, 0.92993, 0.93093, 0.93193, 0.93293, 0.93393, 0.93493, 0.93594,
0.93694, 0.93794, 0.93894, 0.93994, 0.94094, 0.94194, 0.94294, 0.94394, 0.94494, 0.94595, 0.94695, 0.94795, 0.94895, 0.94995, 0.95095, 0.95195, 0.95295, 0.95395, 0.95495, 0.95596, 0.95696, 0.95796, 0.95896, 0.95996,
0.96096, 0.96196, 0.96296, 0.96396, 0.96496, 0.96597, 0.96697, 0.96797, 0.96897, 0.96997, 0.97097, 0.97197, 0.97297, 0.97397, 0.97497, 0.97598, 0.97698, 0.97798, 0.97898, 0.97998, 0.98098, 0.98198, 0.98298, 0.98398,
0.98498, 0.98599, 0.98699, 0.98799, 0.98899, 0.98999, 0.99099, 0.99199, 0.99299, 0.99399, 0.99499, 0.996, 0.997, 0.998, 0.999, 1]), array([[ 0.2381, 0.2381, 0.31248, ..., 1, 1, 1],
[ 0.29697, 0.29697, 0.34452, ..., 1, 1, 1],
[ 0.25434, 0.25434, 0.3401, ..., 1, 1, 1],
...,
[ 0.29614, 0.29614, 0.33488, ..., 1, 1, 1],
[ 0.26425, 0.26425, 0.3053, ..., 1, 1, 1],
[ 0.013514, 0.013514, 0.01872, ..., 1, 1, 1]]), 'Confidence', 'Precision'], [array([ 0, 0.001001, 0.002002, 0.003003, 0.004004, 0.005005, 0.006006, 0.007007, 0.008008, 0.009009, 0.01001, 0.011011, 0.012012, 0.013013, 0.014014, 0.015015, 0.016016, 0.017017, 0.018018, 0.019019, 0.02002, 0.021021, 0.022022, 0.023023,
0.024024, 0.025025, 0.026026, 0.027027, 0.028028, 0.029029, 0.03003, 0.031031, 0.032032, 0.033033, 0.034034, 0.035035, 0.036036, 0.037037, 0.038038, 0.039039, 0.04004, 0.041041, 0.042042, 0.043043, 0.044044, 0.045045, 0.046046, 0.047047,
0.048048, 0.049049, 0.05005, 0.051051, 0.052052, 0.053053, 0.054054, 0.055055, 0.056056, 0.057057, 0.058058, 0.059059, 0.06006, 0.061061, 0.062062, 0.063063, 0.064064, 0.065065, 0.066066, 0.067067, 0.068068, 0.069069, 0.07007, 0.071071,
0.072072, 0.073073, 0.074074, 0.075075, 0.076076, 0.077077, 0.078078, 0.079079, 0.08008, 0.081081, 0.082082, 0.083083, 0.084084, 0.085085, 0.086086, 0.087087, 0.088088, 0.089089, 0.09009, 0.091091, 0.092092, 0.093093, 0.094094, 0.095095,
0.096096, 0.097097, 0.098098, 0.099099, 0.1001, 0.1011, 0.1021, 0.1031, 0.1041, 0.10511, 0.10611, 0.10711, 0.10811, 0.10911, 0.11011, 0.11111, 0.11211, 0.11311, 0.11411, 0.11512, 0.11612, 0.11712, 0.11812, 0.11912,
0.12012, 0.12112, 0.12212, 0.12312, 0.12412, 0.12513, 0.12613, 0.12713, 0.12813, 0.12913, 0.13013, 0.13113, 0.13213, 0.13313, 0.13413, 0.13514, 0.13614, 0.13714, 0.13814, 0.13914, 0.14014, 0.14114, 0.14214, 0.14314,
0.14414, 0.14515, 0.14615, 0.14715, 0.14815, 0.14915, 0.15015, 0.15115, 0.15215, 0.15315, 0.15415, 0.15516, 0.15616, 0.15716, 0.15816, 0.15916, 0.16016, 0.16116, 0.16216, 0.16316, 0.16416, 0.16517, 0.16617, 0.16717,
0.16817, 0.16917, 0.17017, 0.17117, 0.17217, 0.17317, 0.17417, 0.17518, 0.17618, 0.17718, 0.17818, 0.17918, 0.18018, 0.18118, 0.18218, 0.18318, 0.18418, 0.18519, 0.18619, 0.18719, 0.18819, 0.18919, 0.19019, 0.19119,
0.19219, 0.19319, 0.19419, 0.1952, 0.1962, 0.1972, 0.1982, 0.1992, 0.2002, 0.2012, 0.2022, 0.2032, 0.2042, 0.20521, 0.20621, 0.20721, 0.20821, 0.20921, 0.21021, 0.21121, 0.21221, 0.21321, 0.21421, 0.21522,
0.21622, 0.21722, 0.21822, 0.21922, 0.22022, 0.22122, 0.22222, 0.22322, 0.22422, 0.22523, 0.22623, 0.22723, 0.22823, 0.22923, 0.23023, 0.23123, 0.23223, 0.23323, 0.23423, 0.23524, 0.23624, 0.23724, 0.23824, 0.23924,
0.24024, 0.24124, 0.24224, 0.24324, 0.24424, 0.24525, 0.24625, 0.24725, 0.24825, 0.24925, 0.25025, 0.25125, 0.25225, 0.25325, 0.25425, 0.25526, 0.25626, 0.25726, 0.25826, 0.25926, 0.26026, 0.26126, 0.26226, 0.26326,
0.26426, 0.26527, 0.26627, 0.26727, 0.26827, 0.26927, 0.27027, 0.27127, 0.27227, 0.27327, 0.27427, 0.27528, 0.27628, 0.27728, 0.27828, 0.27928, 0.28028, 0.28128, 0.28228, 0.28328, 0.28428, 0.28529, 0.28629, 0.28729,
0.28829, 0.28929, 0.29029, 0.29129, 0.29229, 0.29329, 0.29429, 0.2953, 0.2963, 0.2973, 0.2983, 0.2993, 0.3003, 0.3013, 0.3023, 0.3033, 0.3043, 0.30531, 0.30631, 0.30731, 0.30831, 0.30931, 0.31031, 0.31131,
0.31231, 0.31331, 0.31431, 0.31532, 0.31632, 0.31732, 0.31832, 0.31932, 0.32032, 0.32132, 0.32232, 0.32332, 0.32432, 0.32533, 0.32633, 0.32733, 0.32833, 0.32933, 0.33033, 0.33133, 0.33233, 0.33333, 0.33433, 0.33534,
0.33634, 0.33734, 0.33834, 0.33934, 0.34034, 0.34134, 0.34234, 0.34334, 0.34434, 0.34535, 0.34635, 0.34735, 0.34835, 0.34935, 0.35035, 0.35135, 0.35235, 0.35335, 0.35435, 0.35536, 0.35636, 0.35736, 0.35836, 0.35936,
0.36036, 0.36136, 0.36236, 0.36336, 0.36436, 0.36537, 0.36637, 0.36737, 0.36837, 0.36937, 0.37037, 0.37137, 0.37237, 0.37337, 0.37437, 0.37538, 0.37638, 0.37738, 0.37838, 0.37938, 0.38038, 0.38138, 0.38238, 0.38338,
0.38438, 0.38539, 0.38639, 0.38739, 0.38839, 0.38939, 0.39039, 0.39139, 0.39239, 0.39339, 0.39439, 0.3954, 0.3964, 0.3974, 0.3984, 0.3994, 0.4004, 0.4014, 0.4024, 0.4034, 0.4044, 0.40541, 0.40641, 0.40741,
0.40841, 0.40941, 0.41041, 0.41141, 0.41241, 0.41341, 0.41441, 0.41542, 0.41642, 0.41742, 0.41842, 0.41942, 0.42042, 0.42142, 0.42242, 0.42342, 0.42442, 0.42543, 0.42643, 0.42743, 0.42843, 0.42943, 0.43043, 0.43143,
0.43243, 0.43343, 0.43443, 0.43544, 0.43644, 0.43744, 0.43844, 0.43944, 0.44044, 0.44144, 0.44244, 0.44344, 0.44444, 0.44545, 0.44645, 0.44745, 0.44845, 0.44945, 0.45045, 0.45145, 0.45245, 0.45345, 0.45445, 0.45546,
0.45646, 0.45746, 0.45846, 0.45946, 0.46046, 0.46146, 0.46246, 0.46346, 0.46446, 0.46547, 0.46647, 0.46747, 0.46847, 0.46947, 0.47047, 0.47147, 0.47247, 0.47347, 0.47447, 0.47548, 0.47648, 0.47748, 0.47848, 0.47948,
0.48048, 0.48148, 0.48248, 0.48348, 0.48448, 0.48549, 0.48649, 0.48749, 0.48849, 0.48949, 0.49049, 0.49149, 0.49249, 0.49349, 0.49449, 0.4955, 0.4965, 0.4975, 0.4985, 0.4995, 0.5005, 0.5015, 0.5025, 0.5035,
0.5045, 0.50551, 0.50651, 0.50751, 0.50851, 0.50951, 0.51051, 0.51151, 0.51251, 0.51351, 0.51451, 0.51552, 0.51652, 0.51752, 0.51852, 0.51952, 0.52052, 0.52152, 0.52252, 0.52352, 0.52452, 0.52553, 0.52653, 0.52753,
0.52853, 0.52953, 0.53053, 0.53153, 0.53253, 0.53353, 0.53453, 0.53554, 0.53654, 0.53754, 0.53854, 0.53954, 0.54054, 0.54154, 0.54254, 0.54354, 0.54454, 0.54555, 0.54655, 0.54755, 0.54855, 0.54955, 0.55055, 0.55155,
0.55255, 0.55355, 0.55455, 0.55556, 0.55656, 0.55756, 0.55856, 0.55956, 0.56056, 0.56156, 0.56256, 0.56356, 0.56456, 0.56557, 0.56657, 0.56757, 0.56857, 0.56957, 0.57057, 0.57157, 0.57257, 0.57357, 0.57457, 0.57558,
0.57658, 0.57758, 0.57858, 0.57958, 0.58058, 0.58158, 0.58258, 0.58358, 0.58458, 0.58559, 0.58659, 0.58759, 0.58859, 0.58959, 0.59059, 0.59159, 0.59259, 0.59359, 0.59459, 0.5956, 0.5966, 0.5976, 0.5986, 0.5996,
0.6006, 0.6016, 0.6026, 0.6036, 0.6046, 0.60561, 0.60661, 0.60761, 0.60861, 0.60961, 0.61061, 0.61161, 0.61261, 0.61361, 0.61461, 0.61562, 0.61662, 0.61762, 0.61862, 0.61962, 0.62062, 0.62162, 0.62262, 0.62362,
0.62462, 0.62563, 0.62663, 0.62763, 0.62863, 0.62963, 0.63063, 0.63163, 0.63263, 0.63363, 0.63463, 0.63564, 0.63664, 0.63764, 0.63864, 0.63964, 0.64064, 0.64164, 0.64264, 0.64364, 0.64464, 0.64565, 0.64665, 0.64765,
0.64865, 0.64965, 0.65065, 0.65165, 0.65265, 0.65365, 0.65465, 0.65566, 0.65666, 0.65766, 0.65866, 0.65966, 0.66066, 0.66166, 0.66266, 0.66366, 0.66466, 0.66567, 0.66667, 0.66767, 0.66867, 0.66967, 0.67067, 0.67167,
0.67267, 0.67367, 0.67467, 0.67568, 0.67668, 0.67768, 0.67868, 0.67968, 0.68068, 0.68168, 0.68268, 0.68368, 0.68468, 0.68569, 0.68669, 0.68769, 0.68869, 0.68969, 0.69069, 0.69169, 0.69269, 0.69369, 0.69469, 0.6957,
0.6967, 0.6977, 0.6987, 0.6997, 0.7007, 0.7017, 0.7027, 0.7037, 0.7047, 0.70571, 0.70671, 0.70771, 0.70871, 0.70971, 0.71071, 0.71171, 0.71271, 0.71371, 0.71471, 0.71572, 0.71672, 0.71772, 0.71872, 0.71972,
0.72072, 0.72172, 0.72272, 0.72372, 0.72472, 0.72573, 0.72673, 0.72773, 0.72873, 0.72973, 0.73073, 0.73173, 0.73273, 0.73373, 0.73473, 0.73574, 0.73674, 0.73774, 0.73874, 0.73974, 0.74074, 0.74174, 0.74274, 0.74374,
0.74474, 0.74575, 0.74675, 0.74775, 0.74875, 0.74975, 0.75075, 0.75175, 0.75275, 0.75375, 0.75475, 0.75576, 0.75676, 0.75776, 0.75876, 0.75976, 0.76076, 0.76176, 0.76276, 0.76376, 0.76476, 0.76577, 0.76677, 0.76777,
0.76877, 0.76977, 0.77077, 0.77177, 0.77277, 0.77377, 0.77477, 0.77578, 0.77678, 0.77778, 0.77878, 0.77978, 0.78078, 0.78178, 0.78278, 0.78378, 0.78478, 0.78579, 0.78679, 0.78779, 0.78879, 0.78979, 0.79079, 0.79179,
0.79279, 0.79379, 0.79479, 0.7958, 0.7968, 0.7978, 0.7988, 0.7998, 0.8008, 0.8018, 0.8028, 0.8038, 0.8048, 0.80581, 0.80681, 0.80781, 0.80881, 0.80981, 0.81081, 0.81181, 0.81281, 0.81381, 0.81481, 0.81582,
0.81682, 0.81782, 0.81882, 0.81982, 0.82082, 0.82182, 0.82282, 0.82382, 0.82482, 0.82583, 0.82683, 0.82783, 0.82883, 0.82983, 0.83083, 0.83183, 0.83283, 0.83383, 0.83483, 0.83584, 0.83684, 0.83784, 0.83884, 0.83984,
0.84084, 0.84184, 0.84284, 0.84384, 0.84484, 0.84585, 0.84685, 0.84785, 0.84885, 0.84985, 0.85085, 0.85185, 0.85285, 0.85385, 0.85485, 0.85586, 0.85686, 0.85786, 0.85886, 0.85986, 0.86086, 0.86186, 0.86286, 0.86386,
0.86486, 0.86587, 0.86687, 0.86787, 0.86887, 0.86987, 0.87087, 0.87187, 0.87287, 0.87387, 0.87487, 0.87588, 0.87688, 0.87788, 0.87888, 0.87988, 0.88088, 0.88188, 0.88288, 0.88388, 0.88488, 0.88589, 0.88689, 0.88789,
0.88889, 0.88989, 0.89089, 0.89189, 0.89289, 0.89389, 0.89489, 0.8959, 0.8969, 0.8979, 0.8989, 0.8999, 0.9009, 0.9019, 0.9029, 0.9039, 0.9049, 0.90591, 0.90691, 0.90791, 0.90891, 0.90991, 0.91091, 0.91191,
0.91291, 0.91391, 0.91491, 0.91592, 0.91692, 0.91792, 0.91892, 0.91992, 0.92092, 0.92192, 0.92292, 0.92392, 0.92492, 0.92593, 0.92693, 0.92793, 0.92893, 0.92993, 0.93093, 0.93193, 0.93293, 0.93393, 0.93493, 0.93594,
0.93694, 0.93794, 0.93894, 0.93994, 0.94094, 0.94194, 0.94294, 0.94394, 0.94494, 0.94595, 0.94695, 0.94795, 0.94895, 0.94995, 0.95095, 0.95195, 0.95295, 0.95395, 0.95495, 0.95596, 0.95696, 0.95796, 0.95896, 0.95996,
0.96096, 0.96196, 0.96296, 0.96396, 0.96496, 0.96597, 0.96697, 0.96797, 0.96897, 0.96997, 0.97097, 0.97197, 0.97297, 0.97397, 0.97497, 0.97598, 0.97698, 0.97798, 0.97898, 0.97998, 0.98098, 0.98198, 0.98298, 0.98398,
0.98498, 0.98599, 0.98699, 0.98799, 0.98899, 0.98999, 0.99099, 0.99199, 0.99299, 0.99399, 0.99499, 0.996, 0.997, 0.998, 0.999, 1]), array([[ 1, 1, 1, ..., 0, 0, 0],
[ 1, 1, 1, ..., 0, 0, 0],
[ 1, 1, 1, ..., 0, 0, 0],
...,
[ 1, 1, 1, ..., 0, 0, 0],
[ 0.98077, 0.98077, 0.98077, ..., 0, 0, 0],
[ 1, 1, 1, ..., 0, 0, 0]]), 'Confidence', 'Recall']]
fitness: 0.9164652820108448
keys: ['metrics/precision(B)', 'metrics/recall(B)', 'metrics/mAP50(B)', 'metrics/mAP50-95(B)']
maps: array([ 0.93024, 0.93448, 0.91009, 0.94852, 0.94384, 0.95948, 0.93529, 0.8955, 0.92697, 0.89569, 0.91181, 0.9311, 0.94078, 0.93013, 0.88314, 0.94536, 0.87169, 0.90358, 0.92418, 0.94499, 0.9517, 0.96814, 0.82415, 0.89556,
0.84208, 0.90063, 0.92339, 0.94549, 0.92146, 0.9248, 0.7969, 0.6965])
names: {0: 'tooth0', 1: 'tooth1', 2: 'tooth2', 3: 'tooth3', 4: 'tooth4', 5: 'tooth5', 6: 'tooth6', 7: 'tooth7', 8: 'tooth8', 9: 'tooth9', 10: 'tooth10', 11: 'tooth11', 12: 'tooth12', 13: 'tooth13', 14: 'tooth14', 15: 'tooth15', 16: 'tooth16', 17: 'tooth17', 18: 'tooth18', 19: 'tooth19', 20: 'tooth20', 21: 'tooth21', 22: 'tooth22', 23: 'tooth23', 24: 'tooth24', 25: 'tooth25', 26: 'tooth26', 27: 'tooth27', 28: 'tooth28', 29: 'tooth29', 30: 'tooth30', 31: 'tooth31'}
plot: True
results_dict: {'metrics/precision(B)': 0.9576900497349794, 'metrics/recall(B)': 0.9458879351613054, 'metrics/mAP50(B)': 0.9921855775825806, 'metrics/mAP50-95(B)': 0.9080519158362075, 'fitness': 0.9164652820108448}
save_dir: PosixPath('runs/detect/Arm_Yolo_30')
speed: {'preprocess': 0.1045385996500651, 'inference': 1.5512446562449136, 'loss': 0.0003337860107421875, 'postprocess': 1.3076245784759521}
task: 'detect'
# Load the competition's sample submission to learn the expected output format.
sample_submission=pd.read_csv('/kaggle/input/SampleSubmission (3).csv')
# json is used below to parse the detector's per-image JSON output.
import json
def get_prediction(model, image_id, phase="val"):
    """Run YOLO inference on a single image and return the parsed detections.

    Parameters
    ----------
    model : ultralytics YOLO model.
    image_id : str, e.g. ``'ID_001200'`` (``.png`` is appended).
    phase : ``"val"`` reads from the test image folder; anything else reads
        from the training folder. NOTE(review): the naming is historical —
        "val" here actually means the held-out test images.

    Returns
    -------
    list of dicts, one per detected box (name, confidence, box coords).
    """
    base_dir = ('/kaggle/input/test (1)/test'
                if phase == "val"
                else '/kaggle/input/train (1)/train')
    result = model.predict(
        os.path.join(base_dir, f'{image_id}.png'),
        imgsz=512,
        conf=0.1,
        augment=False,
        agnostic_nms=False,
    )
    return json.loads(result[0].tojson())
def f(x):
    """Map a normalized class index (0-31) back to FDI tooth notation.

    Inverse of the normalization applied when loading the training labels:
    0-7 -> 11-18, 8-15 -> 21-28, 16-23 -> 31-38, 24-31 -> 41-48.
    Anything outside 0-31 returns ``' '`` (kept for backward compatibility).
    """
    for start, offset in ((0, 11), (8, 13), (16, 15), (24, 17)):
        if start <= x < start + 8:
            return int(x + offset)
    return ' '
def generate_prediction(model, ss):
    """Run inference for every unique image listed in `ss` and collect boxes.

    Parameters
    ----------
    model : ultralytics YOLO model passed through to ``get_prediction``.
    ss : DataFrame with an ``Image_ID`` column (sample submission / test list).

    Returns
    -------
    pandas.DataFrame with one row per detected box:
    Image_ID, ToothClass (FDI notation via ``f``), confidence, Xmin/Ymin/Xmax/Ymax.
    """
    # Local import: tqdm is never imported at the top of this file, so the
    # original code silently relied on an earlier (invisible) notebook cell.
    from tqdm import tqdm

    unique_imgs = ss["Image_ID"].unique()
    transformed_pred = []
    for image_id in tqdm(unique_imgs, total=len(unique_imgs),
                         desc="Generating predictions"):  # fixed typo "Generaing"
        pred = get_prediction(model, image_id, phase="val")
        for item in pred:
            transformed_pred.append({
                'Image_ID': image_id,
                # item['name'] is e.g. 'tooth17' -> strip the 'tooth' prefix
                # and convert back to FDI notation.
                'ToothClass': f(int(item['name'][5:])),
                'confidence': item['confidence'],
                'Xmin': item['box']['x1'],
                'Ymin': item['box']['y1'],
                'Xmax': item['box']['x2'],
                'Ymax': item['box']['y2'],
            })
    return pd.DataFrame(transformed_pred)
# Load the test-set listing and run inference over every test image.
test=pd.read_csv('/kaggle/input/Test (13).csv')
transformed_pred=generate_prediction(model,test)
Generating predictions: 100%|██████████| 600/600 [00:16<00:00, 36.29it/s]
# Format labels as 'class_<FDI>' and rename columns to the submission schema.
transformed_pred["ToothClass"] = "class_" + transformed_pred["ToothClass"].astype(str)
transformed_pred.columns = ["Image_ID", "class", "confidence", "xmin", "ymin", "xmax", "ymax"]
transformed_pred
| Image_ID | class | confidence | xmin | ymin | xmax | ymax | |
|---|---|---|---|---|---|---|---|
| 0 | ID_001200 | class_35 | 0.94939 | 344.53448 | 228.70889 | 396.51727 | 279.23587 |
| 1 | ID_001200 | class_46 | 0.93689 | 82.71478 | 160.39441 | 156.75211 | 241.95648 |
| 2 | ID_001200 | class_47 | 0.93645 | 61.27040 | 104.65893 | 128.32320 | 172.63263 |
| 3 | ID_001200 | class_36 | 0.93530 | 350.89240 | 157.68445 | 421.90753 | 237.52722 |
| 4 | ID_001200 | class_44 | 0.93490 | 131.95337 | 272.65680 | 180.60272 | 321.44830 |
| ... | ... | ... | ... | ... | ... | ... | ... |
| 8375 | ID_001799 | class_14 | 0.85651 | 313.80225 | 272.86542 | 373.51965 | 325.82635 |
| 8376 | ID_001799 | class_15 | 0.84385 | 334.92413 | 230.42412 | 395.79706 | 284.67377 |
| 8377 | ID_001799 | class_12 | 0.82248 | 291.01959 | 315.86652 | 342.56622 | 365.41693 |
| 8378 | ID_001799 | class_11 | 0.68745 | 253.75346 | 341.51059 | 311.87579 | 395.75760 |
| 8379 | ID_001799 | class_14 | 0.15729 | 334.34015 | 230.59062 | 395.34357 | 284.69769 |
8380 rows × 7 columns
# Sanity check: per-class detection counts across the test set.
transformed_pred['class'].value_counts()
class class_26 320 class_22 319 class_14 316 class_36 313 class_42 312 class_41 311 class_46 310 class_32 309 class_16 308 class_45 308 class_44 307 class_11 307 class_31 307 class_21 305 class_15 305 class_43 304 class_33 304 class_12 304 class_24 303 class_35 303 class_34 301 class_25 301 class_13 296 class_23 294 class_37 262 class_47 245 class_27 224 class_17 217 class_18 18 class_48 18 class_38 15 class_28 14 Name: count, dtype: int64
# Write the submission file (confidence threshold 0.1) and re-check class counts.
transformed_pred.to_csv('submission-conf0.1.csv',index=False)
transformed_pred['class'].value_counts()
class class_26 320 class_22 319 class_14 316 class_36 313 class_42 312 class_41 311 class_46 310 class_32 309 class_16 308 class_45 308 class_44 307 class_11 307 class_31 307 class_21 305 class_15 305 class_43 304 class_33 304 class_12 304 class_24 303 class_35 303 class_34 301 class_25 301 class_13 296 class_23 294 class_37 262 class_47 245 class_27 224 class_17 217 class_18 18 class_48 18 class_38 15 class_28 14 Name: count, dtype: int64